+/// A queryable source of package summaries used during dependency
+/// resolution.
pub trait Registry {
/// Attempt to find the packages that match a dependency request.
+ ///
+ /// NOTE(review): the parameter is a full `Dependency` (name plus
+ /// version requirement and source), despite being named `name`.
fn query(&mut self, name: &Dependency) -> CargoResult<Vec<Summary>>;
+
+ /// Returns whether or not this registry will return summaries with
+ /// checksums listed.
+ ///
+ /// By default, registries do not support checksums.
+ fn supports_checksums(&self) -> bool {
+ false
+ }
}
impl Registry for Vec<Summary> {
use std::collections::{HashMap, BTreeMap};
+use std::fmt;
+use std::str::FromStr;
use regex::Regex;
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
use core::{Package, PackageId, SourceId, Workspace};
-use util::{CargoResult, Graph, Config};
+use util::{CargoResult, Graph, Config, internal, ChainError, CargoError};
use super::Resolve;
+/// Free-form key/value data carried in the lock file; unknown entries
+/// are preserved round-trip (see `Resolve::merge_from`).
pub type Metadata = BTreeMap<String, String>;
impl EncodableResolve {
- pub fn to_resolve(&self, ws: &Workspace) -> CargoResult<Resolve> {
+ /// Converts the decoded lock file into a `Resolve`, consuming `self`
+ /// so checksum entries can be moved out of the `[metadata]` table.
+ pub fn to_resolve(self, ws: &Workspace) -> CargoResult<Resolve> {
let path_deps = build_path_deps(ws);
let default = try!(ws.current()).package_id().source_id();
try!(add_dependencies(id, pkg));
}
}
+ let mut metadata = self.metadata.unwrap_or(BTreeMap::new());
+
+ // Parse out all package checksums. After we do this we can be in a few
+ // situations:
+ //
+ // * We parsed no checksums. In this situation we're dealing with an old
+ // lock file and we're gonna fill them all in.
+ // * We parsed some checksums, but not one for all packages listed. It
+ // could have been the case that some were listed, then an older Cargo
+ // client added more dependencies, and now we're going to fill in the
+ // missing ones.
+ // * There are too many checksums listed, indicative of an older Cargo
+ // client removing a package but not updating the checksums listed.
+ //
+ // In all of these situations they're part of normal usage, so we don't
+ // really worry about it. We just try to slurp up as many checksums as
+ // possible.
+ let mut checksums = HashMap::new();
+ let prefix = "checksum ";
+ let mut to_remove = Vec::new();
+ for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
+ to_remove.push(k.to_string());
+ let k = &k[prefix.len()..];
+ let id: EncodablePackageId = try!(k.parse().chain_error(|| {
+ internal("invalid encoding of checksum in lockfile")
+ }));
+ let id = try!(to_package_id(&id.name,
+ &id.version,
+ id.source.as_ref(),
+ default,
+ &path_deps));
+ // `<none>` is the serialized sentinel for "no checksum was
+ // calculated for this package".
+ let v = if v == "<none>" {
+ None
+ } else {
+ Some(v.to_string())
+ };
+ checksums.insert(id, v);
+ }
+
+ // Removal is deferred to here because `metadata` is immutably
+ // borrowed by the scan above.
+ for k in to_remove {
+ metadata.remove(&k);
+ }
Ok(Resolve {
graph: g,
root: root,
features: HashMap::new(),
- metadata: self.metadata.clone(),
replacements: replacements,
+ checksums: checksums,
+ metadata: metadata,
})
}
}
source: Option<SourceId>
}
-impl Encodable for EncodablePackageId {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- let mut out = format!("{} {}", self.name, self.version);
+// Displays as `name version`, with ` (source-url)` appended when a
+// source is present; the `FromStr` impl parses this same format.
+impl fmt::Display for EncodablePackageId {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ try!(write!(f, "{} {}", self.name, self.version));
if let Some(ref s) = self.source {
- out.push_str(&format!(" ({})", s.to_url()));
+ try!(write!(f, " ({})", s.to_url()));
}
- out.encode(s)
+ Ok(())
}
}
-impl Decodable for EncodablePackageId {
- fn decode<D: Decoder>(d: &mut D) -> Result<EncodablePackageId, D::Error> {
- let string: String = try!(Decodable::decode(d));
-impl Decodable for EncodablePackageId {
- fn decode<D: Decoder>(d: &mut D) -> Result<EncodablePackageId, D::Error> {
- let string: String = try!(Decodable::decode(d));
+impl FromStr for EncodablePackageId {
+ type Err = Box<CargoError>;
+
+ /// Parses `name version` with an optional ` (source-url)` suffix,
+ /// i.e. the format produced by the `Display` impl.
+ fn from_str(s: &str) -> CargoResult<EncodablePackageId> {
let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap();
- let captures = try!(regex.captures(&string).ok_or_else(|| {
- d.error("invalid serialized PackageId")
+ let captures = try!(regex.captures(s).ok_or_else(|| {
+ internal("invalid serialized PackageId")
}));
let name = captures.at(1).unwrap();
let version = captures.at(2).unwrap();
let source_id = match captures.at(3) {
- Some(s) => {
- Some(try!(SourceId::from_url(s).map_err(|e| {
- d.error(&e.to_string())
- })))
- }
+ Some(s) => Some(try!(SourceId::from_url(s))),
None => None,
};
}
}
+impl Encodable for EncodablePackageId {
+ // Serialized via the `Display` string form rather than as a struct.
+ fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+ self.to_string().encode(s)
+ }
+}
+
+impl Decodable for EncodablePackageId {
+ // Decodes the `Display`/`FromStr` string form, mapping parse errors
+ // into the decoder's error type.
+ fn decode<D: Decoder>(d: &mut D) -> Result<EncodablePackageId, D::Error> {
+ String::decode(d).and_then(|string| {
+ string.parse::<EncodablePackageId>()
+ .map_err(|e| d.error(&e.to_string()))
+ })
+ }
+}
+
+/// Pairs a workspace with its computed `Resolve`; presumably used when
+/// encoding `Cargo.lock` — TODO(review) confirm against the `Encodable`
+/// impl.
pub struct WorkspaceResolve<'a, 'cfg: 'a> {
pub ws: &'a Workspace<'cfg>,
pub resolve: &'a Resolve,
}
Some(encodable_resolve_node(id, self.resolve))
- }).collect::<Vec<EncodableDependency>>();
+ }).collect::<Vec<_>>();
+
+ let mut metadata = self.resolve.metadata.clone();
+
+ for id in ids.iter().filter(|id| !id.source_id().is_path()) {
+ let checksum = match self.resolve.checksums[*id] {
+ Some(ref s) => &s[..],
+ None => "<none>",
+ };
+ let id = encodable_package_id(id);
+ metadata.insert(format!("checksum {}", id.to_string()),
+ checksum.to_string());
+ }
+ let metadata = if metadata.len() == 0 {None} else {Some(metadata)};
EncodableResolve {
package: Some(encodable),
root: encodable_resolve_node(&root, self.resolve),
- metadata: self.resolve.metadata.clone(),
+ metadata: metadata,
}.encode(s)
}
}
//! over the place.
use std::cmp::Ordering;
-use std::collections::{HashSet, HashMap, BinaryHeap};
+use std::collections::{HashSet, HashMap, BinaryHeap, BTreeMap};
use std::fmt;
use std::ops::Range;
use std::rc::Rc;
graph: Graph<PackageId>,
replacements: HashMap<PackageId, PackageId>,
features: HashMap<PackageId, HashSet<String>>,
+ // `None` records that no checksum could be calculated for the package
+ // (serialized as `<none>` in the lock file's `[metadata]` table).
+ checksums: HashMap<PackageId, Option<String>>,
root: PackageId,
- metadata: Option<Metadata>,
+ // Always present now; an absent `[metadata]` table decodes to an
+ // empty map.
+ metadata: Metadata,
}
pub struct Deps<'a> {
}
impl Resolve {
- fn new(root: PackageId) -> Resolve {
- let mut g = Graph::new();
- g.add(root.clone(), &[]);
- Resolve {
- graph: g,
- root: root,
- replacements: HashMap::new(),
- features: HashMap::new(),
- metadata: None,
+ /// Verifies this resolve's checksums against `previous` (the resolve
+ /// decoded from the existing lock file) and copies over any unknown
+ /// metadata. Errors if the same package id has differing checksums.
+ pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> {
+ // Given a previous instance of resolve, it should be forbidden to ever
+ // have checksums which *differ*. If the same package id has differing
+ // checksums, then something has gone wrong such as:
+ //
+ // * Something got seriously corrupted
+ // * A "mirror" isn't actually a mirror as some changes were made
+ // * A replacement source wasn't actually a replacement, some changes
+ // were made
+ //
+ // In all of these cases, we want to report an error to indicate that
+ // something is awry. Normal execution (esp just using crates.io) should
+ // never run into this.
+ for (id, cksum) in previous.checksums.iter() {
+ if let Some(mine) = self.checksums.get(id) {
+ if mine == cksum {
+ continue
+ }
+
+ // If the previous checksum wasn't calculated, the current
+ // checksum is `Some`. This may indicate that a source was
+ // erroneously replaced or was replaced with something that
+ // desires stronger checksum guarantees than can be afforded
+ // elsewhere.
+ if cksum.is_none() {
+ bail!("\
+checksum for `{}` was not previously calculated, but a checksum could now \
+be calculated

+this could be indicative of a few possible situations:
+
+ * the source `{}` did not previously support checksums,
+ but was replaced with one that does
+ * newer Cargo implementations know how to checksum this source, but this
+ older implementation does not
+ * the lock file is corrupt
+", id, id.source_id())
+
+ // If our checksum hasn't been calculated, then it could mean
+ // that future Cargo figured out how to checksum something or
+ // more realistically we were overridden with a source that does
+ // not have checksums.
+ } else if mine.is_none() {
+ bail!("\
+checksum for `{}` could not be calculated, but a checksum is listed in \
+the existing lock file
+
+this could be indicative of a few possible situations:
+
+ * the source `{}` supports checksums,
+ but was replaced with one that doesn't
+ * the lock file is corrupt
+
+unable to verify that `{0}` was the same as before in either situation
+", id, id.source_id())
+
+ // If the checksums aren't equal, and neither is None, then they
+ // must both be Some, in which case the checksum now differs.
+ // That's quite bad!
+ } else {
+ bail!("\
+checksum for `{}` changed between lock files
+
+this could be indicative of a few possible errors:
+
+ * the lock file is corrupt
+ * a replacement source in use (e.g. a mirror) returned a different checksum
+ * the source itself may be corrupt in one way or another
+
+unable to verify that `{0}` was the same as before in any situation
+", id);
+ }
+ }
}
- }
- pub fn copy_metadata(&mut self, other: &Resolve) {
- self.metadata = other.metadata.clone();
+ // Be sure to just copy over any unknown metadata.
+ self.metadata = previous.metadata.clone();
+ Ok(())
}
pub fn iter(&self) -> Nodes<PackageId> {
self.graph.iter()
}
- pub fn root(&self) -> &PackageId { &self.root }
+ /// Returns the id of the root package of this resolve graph.
+ pub fn root(&self) -> &PackageId {
+ &self.root
+ }
pub fn deps(&self, pkg: &PackageId) -> Deps {
Deps { edges: self.graph.edges(pkg), resolve: self }
#[derive(Clone)]
struct Context<'a> {
activations: HashMap<(String, SourceId), Vec<Rc<Summary>>>,
- resolve: Resolve,
+ // Resolution state is carried as separate pieces here and assembled
+ // into a `Resolve` once activation finishes.
+ resolve_graph: Graph<PackageId>,
+ resolve_features: HashMap<PackageId, HashSet<String>>,
+ resolve_replacements: HashMap<PackageId, PackageId>,
replacements: &'a [(PackageIdSpec, Dependency)],
}
replacements: &[(PackageIdSpec, Dependency)],
registry: &mut Registry) -> CargoResult<Resolve> {
let cx = Context {
- resolve: Resolve::new(root.clone()),
+ resolve_graph: Graph::new(),
+ resolve_features: HashMap::new(),
+ resolve_replacements: HashMap::new(),
activations: HashMap::new(),
replacements: replacements,
};
let _p = profile::start(format!("resolving: {}", root));
let cx = try!(activate_deps_loop(cx, registry, summaries));
- try!(check_cycles(&cx));
- Ok(cx.resolve)
+
+ let mut resolve = Resolve {
+ graph: cx.resolve_graph,
+ features: cx.resolve_features,
+ root: root.clone(),
+ checksums: HashMap::new(),
+ metadata: BTreeMap::new(),
+ replacements: cx.resolve_replacements,
+ };
+
+ for summary in cx.activations.values().flat_map(|v| v.iter()) {
+ let cksum = summary.checksum().map(|s| s.to_string());
+ resolve.checksums.insert(summary.package_id().clone(), cksum);
+ }
+
+ try!(check_cycles(&resolve, &cx.activations));
+
+ trace!("resolved: {:?}", resolve);
+ Ok(resolve)
}
/// Attempts to activate the summary `candidate` in the context `cx`.
method: &Method)
-> CargoResult<Option<DepsFrame>> {
if let Some(parent) = parent {
- cx.resolve.graph.link(parent.package_id().clone(),
+ cx.resolve_graph.link(parent.package_id().clone(),
candidate.summary.package_id().clone());
}
let candidate = match candidate.replace {
Some(replace) => {
- cx.resolve.replacements.insert(candidate.summary.package_id().clone(),
+ cx.resolve_replacements.insert(candidate.summary.package_id().clone(),
replace.package_id().clone());
if cx.flag_activated(&replace, method) {
return Ok(None);
remaining_deps.extend(try!(activate(&mut cx, registry, Some(&parent),
candidate, &method)));
}
- trace!("resolved: {:?}", cx.resolve);
+
Ok(cx)
}
dep.name(), parent.name(),
dep.name());
'outer: for v in prev_active.iter() {
- for node in cx.resolve.graph.iter() {
- let edges = match cx.resolve.graph.edges(node) {
+ for node in cx.resolve_graph.iter() {
+ let edges = match cx.resolve_graph.edges(node) {
Some(edges) => edges,
None => continue,
};
let key = (id.name().to_string(), id.source_id().clone());
let prev = self.activations.entry(key).or_insert(Vec::new());
if !prev.iter().any(|c| c == summary) {
- self.resolve.graph.add(id.clone(), &[]);
+ self.resolve_graph.add(id.clone(), &[]);
prev.push(summary.clone());
return false
}
};
let has_default_feature = summary.features().contains_key("default");
- match self.resolve.features(id) {
+ match self.resolve_features.get(id) {
Some(prev) => {
features.iter().all(|f| prev.contains(f)) &&
(!use_default || prev.contains("default") ||
// Record what list of features is active for this package.
if !used_features.is_empty() {
let pkgid = candidate.package_id();
- self.resolve.features.entry(pkgid.clone())
+ self.resolve_features.entry(pkgid.clone())
.or_insert(HashSet::new())
.extend(used_features);
}
}
}
-fn check_cycles(cx: &Context) -> CargoResult<()> {
+fn check_cycles(resolve: &Resolve,
+ activations: &HashMap<(String, SourceId), Vec<Rc<Summary>>>)
+ -> CargoResult<()> {
let mut summaries = HashMap::new();
- for summary in cx.activations.values().flat_map(|v| v) {
+ for summary in activations.values().flat_map(|v| v) {
summaries.insert(summary.package_id(), &**summary);
}
- return visit(&cx.resolve,
- cx.resolve.root(),
+ return visit(resolve,
+ resolve.root(),
&summaries,
&mut HashSet::new(),
&mut HashSet::new());
package_id: PackageId,
dependencies: Vec<Dependency>,
features: HashMap<String, Vec<String>>,
+ checksum: Option<String>,
}
impl Summary {
package_id: pkg_id,
dependencies: dependencies,
features: features,
+ checksum: None,
})
}
pub fn source_id(&self) -> &SourceId { self.package_id.source_id() }
pub fn dependencies(&self) -> &[Dependency] { &self.dependencies }
pub fn features(&self) -> &HashMap<String, Vec<String>> { &self.features }
+ /// The checksum recorded for this package, if any (see
+ /// `set_checksum`).
+ pub fn checksum(&self) -> Option<&str> {
+ self.checksum.as_ref().map(|s| &s[..])
+ }
pub fn override_id(mut self, id: PackageId) -> Summary {
self.package_id = id;
self
}
+ /// Builder-style setter recording the package's checksum.
+ pub fn set_checksum(mut self, cksum: String) -> Summary {
+ self.checksum = Some(cksum);
+ self
+ }
+
pub fn map_dependencies<F>(mut self, f: F) -> Summary
where F: FnMut(Dependency) -> Dependency {
let deps = mem::replace(&mut self.dependencies, Vec::new());
&replace,
registry));
if let Some(previous) = previous {
- resolved.copy_metadata(previous);
+ try!(resolved.merge_from(previous));
}
return Ok(resolved);
};
let mut path = Path::new("/");
let orig_name = name;
+ let new_id;
loop {
let cfg = match self.cfgs.get(name) {
Some(cfg) => cfg,
}
None if *id == cfg.id => return Ok(id.load(self.config)),
None => {
- let new_id = cfg.id.with_precise(id.precise()
- .map(|s| s.to_string()));
- let src = new_id.load(self.config);
- return Ok(Box::new(ReplacedSource::new(id, &new_id, src)))
+ new_id = cfg.id.with_precise(id.precise()
+ .map(|s| s.to_string()));
+ break
}
}
debug!("following pointer to {}", name);
(configuration in `{}`)", name, path.display())
}
}
+ let new_src = new_id.load(self.config);
+ let old_src = id.load(self.config);
+ if new_src.supports_checksums() != old_src.supports_checksums() {
+ let (supports, no_support) = if new_src.supports_checksums() {
+ (name, orig_name)
+ } else {
+ (orig_name, name)
+ };
+ bail!("\
+cannot replace `{orig}` with `{name}`, the source `{supports}` supports \
+checksums, but `{no_support}` does not
+
+a lock file compatible with `{orig}` cannot be generated in this situation
+", orig = orig_name, name = name, supports = supports, no_support = no_support);
+ }
+ Ok(Box::new(ReplacedSource::new(id, &new_id, new_src)))
}
fn add(&mut self, name: &str, cfg: SourceConfig) {
self.parse_registry_dependency(dep)
}).collect();
let deps = try!(deps);
+ let summary = try!(Summary::new(pkgid, deps, features));
+ let summary = summary.set_checksum(cksum.clone());
self.hashes.insert((name, vers), cksum);
- Ok((try!(Summary::new(pkgid, deps, features)), yanked.unwrap_or(false)))
+ Ok((summary, yanked.unwrap_or(false)))
}
/// Converts an encoded dependency in the registry to a cargo dependency
});
summaries.query(dep)
}
+
+ // Registry summaries carry checksums parsed from the index (see
+ // `set_checksum` above), so this source advertises checksum support.
+ fn supports_checksums(&self) -> bool {
+ true
+ }
}
impl<'cfg> Source for RegistrySource<'cfg> {
features: Vec<String>,
}
-fn init() {
+pub fn init() {
let config = paths::home().join(".cargo/config");
t!(fs::create_dir_all(config.parent().unwrap()));
if fs::metadata(&config).is_ok() {
--- /dev/null
+#[macro_use]
+extern crate cargotest;
+extern crate hamcrest;
+
+use std::fs::File;
+use std::io::prelude::*;
+
+use cargotest::support::git;
+use cargotest::support::registry::Package;
+use cargotest::support::{execs, project};
+use hamcrest::assert_that;
+
+// An old-style lock file with no `[metadata]` checksum section must still
+// parse, and after a build the regenerated lock file must start with the
+// original contents (checksums are only appended).
+#[test]
+fn oldest_lockfile_still_works() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project("bar")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ "#)
+ .file("src/lib.rs", "");
+ p.build();
+
+ let lockfile = r#"
+[root]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+"#;
+ File::create(p.root().join("Cargo.lock")).unwrap()
+ .write_all(lockfile.as_bytes()).unwrap();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(0));
+
+ let mut lock = String::new();
+ File::open(p.root().join("Cargo.lock")).unwrap()
+ .read_to_string(&mut lock).unwrap();
+ assert!(lock.starts_with(lockfile.trim()));
+}
+
+// Checksum entries for packages that aren't in the dependency graph at
+// all (wrong name or wrong version) are tolerated: the build succeeds
+// and the lock file is rewritten rather than erroring out.
+#[test]
+fn totally_wild_checksums_works() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project("bar")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ "#)
+ .file("src/lib.rs", "");
+ p.build();
+
+ File::create(p.root().join("Cargo.lock")).unwrap().write_all(br#"
+[root]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"checksum foo 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#).unwrap();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(0));
+
+ // The stale entries should be gone from the rewritten lock file.
+ let mut lock = String::new();
+ File::open(p.root().join("Cargo.lock")).unwrap()
+ .read_to_string(&mut lock).unwrap();
+ assert!(lock.starts_with(r#"
+[root]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"#.trim()));
+}
+
+// A lock-file checksum that disagrees with the one the registry now
+// reports for the same package id is a hard error.
+#[test]
+fn wrong_checksum_is_an_error() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project("bar")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ "#)
+ .file("src/lib.rs", "");
+ p.build();
+
+ t!(t!(File::create(p.root().join("Cargo.lock"))).write_all(br#"
+[root]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#));
+
+ assert_that(p.cargo("build"),
+ execs().with_status(101).with_stderr("\
+[UPDATING] registry `[..]`
+error: checksum for `foo v0.1.0` changed between lock files
+
+this could be indicative of a few possible errors:
+
+ * the lock file is corrupt
+ * a replacement source in use (e.g. a mirror) returned a different checksum
+ * the source itself may be corrupt in one way or another
+
+unable to verify that `foo v0.1.0` was the same as before in any situation
+
+"));
+}
+
+// If the checksum is unlisted in the lock file (i.e. `<none>`) yet we
+// can now calculate it (e.g. it's a registry dep), Cargo reports an
+// error instead of silently filling it in, since this may indicate the
+// source was erroneously replaced.
+#[test]
+fn unlisted_checksum_is_bad_if_we_calculate() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project("bar")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ "#)
+ .file("src/lib.rs", "");
+ p.build();
+
+ t!(t!(File::create(p.root().join("Cargo.lock"))).write_all(br#"
+[root]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>"
+"#));
+
+ assert_that(p.cargo("fetch"),
+ execs().with_status(101).with_stderr("\
+[UPDATING] registry `[..]`
+error: checksum for `foo v0.1.0` was not previously calculated, but a checksum \
+could now be calculated
+
+this could be indicative of a few possible situations:
+
+ * the source `[..]` did not previously support checksums,
+ but was replaced with one that does
+ * newer Cargo implementations know how to checksum this source, but this
+ older implementation does not
+ * the lock file is corrupt
+
+"));
+}
+
+// If the checksum is listed in the lockfile yet we cannot calculate it (e.g.
+// git dependencies as of today), then make sure we choke.
+#[test]
+fn listed_checksum_bad_if_we_cannot_compute() {
+ let git = git::new("foo", |p| {
+ p.file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("src/lib.rs", "")
+ }).unwrap();
+
+ let p = project("bar")
+ .file("Cargo.toml", &format!(r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = {{ git = '{}' }}
+ "#, git.url()))
+ .file("src/lib.rs", "");
+ p.build();
+
+ let lockfile = format!(r#"
+[root]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (git+{0})"
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "git+{0}"
+
+[metadata]
+"checksum foo 0.1.0 (git+{0})" = "checksum"
+"#, git.url());
+ File::create(p.root().join("Cargo.lock")).unwrap()
+ .write_all(lockfile.as_bytes()).unwrap();
+
+ // `fetch` alone is enough to trigger lock-file checksum verification.
+ assert_that(p.cargo("fetch"),
+ execs().with_status(101).with_stderr("\
+[UPDATING] git repository `[..]`
+error: checksum for `foo v0.1.0 ([..])` could not be calculated, but a \
+checksum is listed in the existing lock file[..]
+
+this could be indicative of a few possible situations:
+
+ * the source `[..]` supports checksums,
+ but was replaced with one that doesn't
+ * the lock file is corrupt
+
+unable to verify that `foo v0.1.0 ([..])` was the same as before in either situation
+
+"));
+}
#[test]
fn update_publish_then_update() {
+ // First generate a Cargo.lock and a clone of the registry index at the
+ // "head" of the current registry.
let p = project("foo")
.file("Cargo.toml", r#"
[project]
assert_that(p.cargo("build"),
execs().with_status(0));
+ // Next, publish a new package and back up the copy of the registry we just
+ // created.
Package::new("a", "0.1.1").publish();
+ let registry = paths::home().join(".cargo/registry");
+ let backup = paths::root().join("registry-backup");
+ fs::rename(®istry, &backup).unwrap();
- let lock = p.root().join("Cargo.lock");
- let mut s = String::new();
- File::open(&lock).unwrap().read_to_string(&mut s).unwrap();
- File::create(&lock).unwrap()
- .write_all(s.replace("0.1.0", "0.1.1").as_bytes()).unwrap();
- println!("second");
+ // Generate a Cargo.lock with the newer version, and then move the old copy
+ // of the registry back into place.
+ let p2 = project("foo2")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = "0.1.1"
+ "#)
+ .file("src/main.rs", "fn main() {}");
+ assert_that(p2.cargo_process("build"),
+ execs().with_status(0));
+ fs::remove_dir_all(®istry).unwrap();
+ fs::rename(&backup, ®istry).unwrap();
+ fs::rename(p2.root().join("Cargo.lock"), p.root().join("Cargo.lock")).unwrap();
- fs::remove_dir_all(&p.root().join("target")).unwrap();
+ // Finally, build the first project again (with our newer Cargo.lock) which
+ // should force an update of the old registry, download the new crate, and
+ // then build everything again.
assert_that(p.cargo("build"),
execs().with_status(0).with_stderr(&format!("\
[UPDATING] [..]